File: /home/o5t6x7pgljbm/public_html/admin/app/V2/BulkOperations/ImportServices/TempTableService.php
<?php

namespace App\V2\BulkOperations\ImportServices;

use App\Libraries\Helpers;
use App\V2\BulkOperations\ImportFields\DataImportField;
use Exception;
use Illuminate\Database\QueryException;
use Illuminate\Support\Facades\DB;
use Illuminate\Support\Facades\Log;
use Illuminate\Support\Str;
use League\Csv\Reader;

class TempTableService
{
    public function createTempTable($fields, $tempTableName)
    {
        $dropSql = "DROP TABLE IF EXISTS {$tempTableName}";
        DB::statement($dropSql);

        // Start building the SQL for creating a temporary table
        $sql = "CREATE TABLE IF NOT EXISTS {$tempTableName} (";

        // Add each field's definition to the SQL
        $fieldDefinitions = [];
        foreach ($fields as $field) {
            $fieldDefinitions[] = " {$field->getColumnName()} {$field->getDbDataType()} "
                . ($field->getDefaultValue() != null ? " DEFAULT {$field->getDefaultValue()} " : '');
        }

        $sql .= ' id int PRIMARY KEY NOT NULL AUTO_INCREMENT, ';

        // Join the field definitions with commas and add to the SQL
        $sql .= implode(', ', $fieldDefinitions);

        // Close the SQL statement
        $sql .= ") ENGINE=InnoDB;";

        // Execute the SQL statement to create the temporary table
        DB::statement($sql);
    }

    public function insertRecords($tempTableName, $csvFilePath, $fields)
    {
        // Open the CSV file
        $csv = Reader::createFromPath($csvFilePath, 'r');
        $csv->setHeaderOffset(0); // Set the header offset to 0 to use the first row as the header

        // Get the header from the CSV file
        $header = $csv->getHeader();

        // Begin a transaction
        DB::beginTransaction();

        try {
            // Iterate over the records in the CSV file
            foreach ($csv->getRecords() as $record) {
                // Map the CSV data to the database fields
                $data = [];
                foreach ($fields as $field) {
                    $columnName = $field->getColumnName();
                    $csvName = $field->getCsvName();

                    // If the CSV column exists in the record, add it to the data array
                    if (isset($record[$csvName])) {
                        $data[$columnName] = in_array(strtolower($record[$csvName]), ['null', ''])
                            ? null
                            : $record[$csvName];
                    } elseif ($field->getAutoGenerateType() != null) {
                        $data[$columnName] = $this->getColumnDefaultValue($field);
                    } elseif ($field->getTransformer() != null) {
                        $transformer = $field->getTransformer();
                        $source_column_names = $transformer->getSourceDataImportFieldColumnNames();
                        $source_values = $this->getFieldValuesFromCsvRecord(array_merge($record, $data), $source_column_names);

                        try {
                            $data[$columnName] = $transformer->getTransformedValue($source_values);
                        } catch (Exception $e) {
                            $data[$columnName] = null;
                            $data['validate_errors'] = (isset($data['validate_errors']) ? $data['validate_errors'] . ', ' : '') . $e->getMessage();
                            $data['validate_is_valid'] = 0;
                        }
                    }
                }

                // Insert the record into the temporary table
                DB::table($tempTableName)->insert($data);
            }

            // Commit the transaction
            DB::commit();
        } catch (QueryException $e) {
            // Rollback the transaction on error
            DB::rollBack();
            throw $e;
        } catch (\Exception $e) {
            // Rollback the transaction on error
            DB::rollBack();
            throw $e;
        }
    }

    private function getColumnDefaultValue(DataImportField $field)
    {
        $default_value = null;

        switch ($field->getAutoGenerateType()) {
            case DataImportField::AUTO_GENERATE_TYPE_INT_UUID:
                $default_value = Helpers::getNumericUUID();
                break;
            case DataImportField::AUTO_GENERATE_TYPE_STR_UUID:
                $default_value = Helpers::getStringUUID();
                break;
            case DataImportField::AUTO_GENERATE_TYPE_API_TOKEN:
                $default_value = Str::random(60);
                break;
            default:
                $default_value = null;
                break;
        }

        return $default_value;
    }

    private function getFieldValuesFromCsvRecord($record, $column_names)
    {
        $column_values = [];

        foreach ($column_names as $column_name) {
            $column_values[] = [
                'field_name' => $column_name,
                'field_value' => $record[$column_name] ?? ''
            ];
        }

        return $column_values;
    }

    public function getSampleRecords($tempTableName)
    {
        // Logic to get sample records from the temporary table
    }

    public function validateData($tempTableName)
    {
        // Logic to validate data in the temporary table
        // For demonstration purposes, we'll assume validation is simply checking for null values in required fields

        // Define a simple validation query to check for null values in required fields,
        // using the required column list returned by getRequiredFields()
        $validationQuery = "SELECT * FROM {$tempTableName} WHERE ";
        $requiredFields = $this->getRequiredFields($tempTableName);

        $conditions = [];
        foreach ($requiredFields as $field) {
            $conditions[] = "{$field} IS NULL";
        }

        // Join conditions with OR and complete the query
        $validationQuery .= implode(' OR ', $conditions);

        // Execute the validation query
        $invalidRecords = DB::select($validationQuery);

        // Return true if no invalid records, false otherwise
        return empty($invalidRecords);
    }

    private function getRequiredFields($tempTableName)
    {
        // This is a placeholder method to retrieve required fields for validation
        // In a real implementation, this could fetch metadata from the import profile or another source
        // For demonstration purposes, return a static array of required fields
        return ['full_name', 'email', 'mobile', 'password', 'user_role', 'group_uuid', 'country_name', 'zipcode'];
    }

    public function executeUpdateInBatches($updateSql, $primaryKey, $batchSize, $totalRecords)
    {
        // Calculate the total number of batches needed
        $totalBatches = ceil($totalRecords / $batchSize);

        // Initialize the starting point for the primary key
        $startId = 0;

        for ($batch = 0; $batch < $totalBatches; $batch++) {
            // Calculate the end ID for the current batch
            $endId = $startId + $batchSize;

            // Modify the update SQL to include the batching condition
            $batchedUpdateSql = $updateSql . " AND $primaryKey >= $startId AND $primaryKey < $endId";

            // Run the update query for the current batch
            DB::statement($batchedUpdateSql);

            // Update the start ID for the next batch
            $startId = $endId;
        }
    }

    public function verifyAndUpdateInBatch($selectSql, $primaryKey, $batchSize, $totalRecords, $updateTable, $updateData, callable $conditionFunction)
    {
        // Calculate the total number of batches needed
        $totalBatches = ceil($totalRecords / $batchSize);

        // Initialize the starting point for the primary key
        $startId = 0;

        for ($batch = 0; $batch < $totalBatches; $batch++) {
            Log::debug("Running batch #$batch");

            // Calculate the end ID for the current batch
            $endId = $startId + $batchSize;

            // Modify the select SQL to include the batching condition
            $sql = $selectSql . " AND $primaryKey >= $startId AND $primaryKey < $endId";

            // Run the select query for the current batch
            $records = DB::select($sql);

            $idsToUpdate = [];
            foreach ($records as $record) {
                if (!$conditionFunction($record)) {
                    // Collect the ID if it doesn't match the condition
                    $idsToUpdate[] = $record->{$primaryKey};
                }
            }

            if (count($idsToUpdate) > 0) {
                DB::table($updateTable)
                    ->whereIn($primaryKey, $idsToUpdate)
                    ->update($updateData);
            }

            // Update the start ID for the next batch
            $startId = $endId;
        }
    }
}